Source code for hysop.operator.min_max

# Copyright (c) HySoP 2011-2024
#
# This file is part of HySoP software.
# See "https://particle_methods.gricad-pages.univ-grenoble-alpes.fr/hysop-doc/"
# for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""
@file min_max.py
MinMaxFieldStatistics: compute min(f), max(f) and/or max(|f|) for a given field f.
MinMaxDerivativeStatistics: compute min(d^kFi/dXj^k), max(d^kFi/dXj^k) and/or max(|d^kFi/dXj^k|)
                            for a given field, component, direction and order.
MinMaxGradientStatistics: compute min(dFi/dXj), max(dFi/dXj) and/or max(|dFi/dXj|)
                          for a given field, up to all components in all directions.
"""
from hysop import vprint
from hysop.constants import Backend, Implementation
from hysop.tools.htypes import check_instance, first_not_None, to_tuple
from hysop.tools.henum import EnumFactory
from hysop.tools.decorators import debug
from hysop.tools.numpywrappers import npw
from hysop.tools.sympy_utils import nabla, partial, subscript
from hysop.fields.continuous_field import Field
from hysop.topology.cartesian_descriptor import CartesianTopologyDescriptors
from hysop.parameters.scalar_parameter import ScalarParameter
from hysop.parameters.tensor_parameter import TensorParameter
from hysop.core.graph.computational_operator import ComputationalGraphOperator
from hysop.core.graph.computational_node_frontend import ComputationalGraphNodeFrontend
from hysop.core.graph.graph import op_apply
from hysop.operator.base.min_max import (
    MinMaxFieldStatisticsBase,
    MinMaxDerivativeStatisticsBase,
)


class MinMaxFieldStatistics(ComputationalGraphNodeFrontend):
    """
    Operator frontend to compute min and max statistics on a specific field.
    """

    @classmethod
    def implementations(cls):
        from hysop.backend.host.python.operator.min_max import (
            PythonMinMaxFieldStatistics,
        )
        from hysop.backend.device.opencl.operator.min_max import (
            OpenClMinMaxFieldStatistics,
        )

        implementations = {
            Implementation.PYTHON: PythonMinMaxFieldStatistics,
            Implementation.OPENCL: OpenClMinMaxFieldStatistics,
        }
        return implementations

    @classmethod
    def default_implementation(cls):
        return Implementation.PYTHON

    @debug
    def __new__(
        cls,
        field,
        components=None,
        coeffs=None,
        Fmin=None,
        Fmax=None,
        Finf=None,
        all_quiet=False,
        name=None,
        pbasename=None,
        ppbasename=None,
        variables=None,
        implementation=None,
        base_kwds=None,
        **kwds,
    ):
        return super().__new__(
            cls,
            field=field,
            components=components,
            coeffs=coeffs,
            Fmin=Fmin,
            Fmax=Fmax,
            Finf=Finf,
            all_quiet=all_quiet,
            name=name,
            pbasename=pbasename,
            ppbasename=ppbasename,
            variables=variables,
            implementation=implementation,
            base_kwds=base_kwds,
            **kwds,
        )

    @debug
    def __init__(
        self,
        field,
        components=None,
        coeffs=None,
        Fmin=None,
        Fmax=None,
        Finf=None,
        all_quiet=False,
        name=None,
        pbasename=None,
        ppbasename=None,
        variables=None,
        implementation=None,
        base_kwds=None,
        **kwds,
    ):
        """
        Initialize a MinMaxFieldStatistics operator frontend.
        Available operator backends are PYTHON and OPENCL.

        MinMaxFieldStatistics can compute some commonly required Field statistics:
            Fmin: component-wise min values of the field.
            Fmax: component-wise max values of the field.
            Finf: component-wise max values of the absolute value of the field
                  (computed using Fmin and Fmax).

        Statistics are only computed if explicitly requested by the user, unless
        they are required to compute another user-requested statistic, see Notes.
        All statistics may additionally be scaled by a coefficient.

        Compute vectorized statistics:
            Fmin = Smin * min(F[components])
            Fmax = Smax * max(F[components])
            Finf = Sinf * max(|Fmin|, |Fmax|)

        where F    is an input field,
              Fmin = created or supplied TensorParameter,
              Fmax = created or supplied TensorParameter,
              Finf = created or supplied TensorParameter,
              Smin = coeffs['Fmin'],
              Smax = coeffs['Fmax'],
              Sinf = coeffs['Finf'].

        Parameters
        ----------
        field: Field
            The continuous field on which the statistics will be computed.
        components: array like of ints, optional
            The components on which the statistics are computed,
            defaults to all components.
        coeffs: dict of array like of coefficients, optional
            Optional scaling of the statistics.
            Each scaling factor should be a scalar or an array-like of scalars,
            one per component. If not given, defaults to 1 for all statistics.
        Fmin, Fmax, Finf: TensorParameter or boolean, optional
            At least one statistic should be specified (either by boolean or
            TensorParameter). TensorParameters should be of shape
            (nb_components,), see Notes.
            If set to True, the TensorParameter will be generated automatically.
            Autogenerated TensorParameters that are not required by the user
            (i.e. left to None or False during __init__) are, if required,
            generated but set to be quiet.
        all_quiet: bool
            Set all autogenerated TensorParameters to be quiet.
        name: str, optional
            Name of this operator.
        pbasename: str, optional
            Parameters basename for created parameters.
            Defaults to field.name.
        ppbasename: str, optional
            Parameters pretty basename for created parameters.
            Defaults to pbasename.
        variables: dict, optional
            Dictionary of fields as keys and topologies as values.
        implementation: hysop.constants.Implementation, optional
            Target backend implementation, should be contained in
            available_implementations().
            If None, implementation will be set to default_implementation().
        base_kwds: dict, optional
            Base class keyword arguments as a dictionary.
        kwds:
            Extra keyword arguments passed towards the operator backend
            implementation.

        Attributes
        ----------
        Fmin, Fmax, Finf: TensorParameter
            All generated tensor parameters.
            Unused statistics are set to None.

        Notes
        -----
        nb_components = min(field.nb_components, len(components)).

        About statistics:
            Finf requires the computation of Fmin and Fmax and will have value:
                Finf = Sinf * max( abs(Smin*Fmin), abs(Smax*Fmax) )
            where Sinf, Smin and Smax are the scaling coefficients defined in
            coeffs.
        """
        check_instance(field, Field)
        check_instance(
            components,
            (tuple, list, npw.ndarray),
            values=int,
            allow_none=True,
            min_value=0,
            max_value=field.nb_components - 1,
        )
        check_instance(
            coeffs,
            dict,
            keys=str,
            values=(float, tuple, list, npw.ndarray),
            allow_none=True,
        )
        check_instance(
            variables,
            dict,
            keys=Field,
            values=CartesianTopologyDescriptors,
            allow_none=True,
        )
        check_instance(name, str, allow_none=True)
        check_instance(pbasename, str, allow_none=True)
        check_instance(ppbasename, str, allow_none=True)
        check_instance(implementation, Implementation, allow_none=True)
        check_instance(base_kwds, dict, keys=str, allow_none=True)

        if (
            ((Fmin is None) or (Fmin is False))
            and ((Fmax is None) or (Fmax is False))
            and ((Finf is None) or (Finf is False))
        ):
            msg = "No statistics were requested."
            msg += "\nPlease specify Fmin, Fmax and/or Finf by either setting "
            msg += "their value to True, or by passing an already existing "
            msg += "tensor parameter."
            raise ValueError(msg)

        # Pregenerate parameters so that we can directly store them in self.
        parameters = MinMaxFieldStatisticsBase.build_parameters(
            field=field,
            components=components,
            all_quiet=all_quiet,
            Fmin=Fmin,
            Fmax=Fmax,
            Finf=Finf,
            pbasename=pbasename,
            ppbasename=ppbasename,
        )
        (Fmin, Fmax, Finf) = tuple(parameters[k] for k in ("Fmin", "Fmax", "Finf"))

        super().__init__(
            field=field,
            components=components,
            coeffs=coeffs,
            Fmin=Fmin,
            Fmax=Fmax,
            Finf=Finf,
            all_quiet=all_quiet,
            name=name,
            pbasename=pbasename,
            ppbasename=ppbasename,
            variables=variables,
            implementation=implementation,
            base_kwds=base_kwds,
            **kwds,
        )
        self.Fmin, self.Fmax, self.Finf = (Fmin, Fmax, Finf)
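
# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of how this frontend is typically driven from a problem
# script. The names `box`, `velo`, `npts` and `impl` below are assumptions
# introduced for illustration only; they are not defined in this file.
#
#   from hysop import Box, Field
#   from hysop.constants import Implementation
#
#   box  = Box(length=(1.0,) * 3)                      # computational domain
#   velo = Field(domain=box, name="V", is_vector=True)  # 3-component field
#   npts = (64,) * 3                                    # grid discretization
#   impl = Implementation.PYTHON
#
#   # Request Fmin, Fmax and Finf; the corresponding TensorParameters are
#   # generated automatically and exposed as attributes of the operator.
#   min_max_V = MinMaxFieldStatistics(
#       field=velo,
#       Fmin=True, Fmax=True, Finf=True,
#       variables={velo: npts},
#       implementation=impl,
#   )
#   # Once the enclosing problem has been built and applied, min_max_V.Finf
#   # holds the component-wise max(|V|) values.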
class MinMaxDerivativeStatistics(ComputationalGraphNodeFrontend):
    """
    Operator frontend to compute min and max statistics on a specific
    derivative of a scalar field, without keeping its output.
    """

    @classmethod
    def implementations(cls):
        raise NotImplementedError

    @classmethod
    def default_implementation(cls):
        raise NotImplementedError

    @debug
    def __new__(
        cls,
        F,
        dF=None,
        A=None,
        derivative=None,
        direction=None,
        Fmin=None,
        Fmax=None,
        Finf=None,
        coeffs=None,
        all_quiet=False,
        name=None,
        pbasename=None,
        ppbasename=None,
        variables=None,
        implementation=None,
        base_kwds=None,
        **kwds,
    ):
        return super().__new__(
            cls,
            F=F,
            dF=dF,
            A=A,
            derivative=derivative,
            direction=direction,
            Fmin=Fmin,
            Fmax=Fmax,
            Finf=Finf,
            coeffs=coeffs,
            all_quiet=all_quiet,
            name=name,
            pbasename=pbasename,
            ppbasename=ppbasename,
            variables=variables,
            implementation=implementation,
            base_kwds=base_kwds,
            **kwds,
        )

    @debug
    def __init__(
        self,
        F,
        dF=None,
        A=None,
        derivative=None,
        direction=None,
        Fmin=None,
        Fmax=None,
        Finf=None,
        coeffs=None,
        all_quiet=False,
        name=None,
        pbasename=None,
        ppbasename=None,
        variables=None,
        implementation=None,
        base_kwds=None,
        **kwds,
    ):
        """
        Initialize a MinMaxDerivativeStatistics operator frontend.
        Available operator backends are PYTHON and OPENCL.

        MinMaxDerivativeStatistics can compute some commonly required Field
        derivative statistics:
            Fmin: min value of a derivative of the field.
            Fmax: max value of a derivative of the field.
            Finf: max value of the absolute value of a derivative of the field
                  (computed using Fmin and Fmax).

        The derivative of the scalar field F is first computed out of place
        into the scalar field dF, in a given direction, at a given order and on
        a given backend. The derivative is then possibly scaled by another
        field/parameter/value A.

        Once the scaled derivative has been computed, the user-requested
        statistics (min and max values) are evaluated on this new field and
        scaled by the coefficients stored in coeffs.

        1) Compute the derivative
            dF = alpha * d^n(F)/dXj^n

        2) Compute statistics
            Fmin = Smin * min(dF)
            Fmax = Smax * max(dF)
            Finf = Sinf * max(|Fmin|, |Fmax|)

        where F  is an input field,
              dF is an output field (by default a temporary field),
              n     = derivative order > 0,
              alpha = A, where A is a Field, a Parameter or a scalar,
              Fmin = created or supplied TensorParameter,
              Fmax = created or supplied TensorParameter,
              Finf = created or supplied TensorParameter,
              Smin = coeffs['Fmin'],
              Smax = coeffs['Fmax'],
              Sinf = coeffs['Finf'].

        Statistics are only computed if explicitly requested by the user, unless
        they are required to compute another user-requested statistic, see Notes.

        Parameters
        ----------
        F: hysop.field.continuous_field.Field
            Continuous field as input.
        dF: hysop.field.continuous_field.Field, optional
            Continuous field to be written.
            Some backends may allow inplace differentiation.
            By default a temporary scalar field is created, which means that the
            computed derivative may be discarded after this operator has been
            applied because of temporary buffer sharing between operators.
        A: numerical value, ScalarParameter or Field, optional
            Scaling field/parameter/value for convenience.
            Defaults to no scaling.
        derivative: int, optional
            Which derivative to generate.
            Defaults to 1.
        direction: int, optional
            Direction in which to take the derivative.
            Defaults to 0.
        Fmin, Fmax, Finf: TensorParameter or boolean, optional
            The output parameters that will contain the statistics.
            At least one statistic should be specified (either by boolean or
            TensorParameter). TensorParameters should be of shape (1,).
            If set to True, the TensorParameter will be generated automatically.
            Autogenerated TensorParameters that are not required by the user
            (i.e. left to None or False during __init__) are, if required,
            generated but set to be quiet.
            Autogenerated parameters can be retrieved using the 'Fmin', 'Fmax'
            and 'Finf' attributes.
        all_quiet: bool
            Set all autogenerated TensorParameters to be quiet.
        coeffs: dict of array like of coefficients, optional
            Optional scaling of the statistics.
            Each scaling factor should be a scalar or an array-like of scalars,
            one per component. If not given, defaults to 1 for all statistics.
        name: str, optional
            Name of this operator.
        pbasename: str, optional
            Parameters basename for created parameters.
            Defaults to field.name.
        ppbasename: str, optional
            Parameters pretty basename for created parameters.
            Defaults to pbasename.
        variables: dict, optional
            Dictionary of fields as keys and topologies as values.
        implementation: hysop.constants.Implementation, optional
            Target backend implementation, should be contained in
            available_implementations().
            If None, implementation will be set to default_implementation().
        base_kwds: dict, optional
            Base class keyword arguments as a dictionary.
        kwds:
            Extra keyword arguments passed towards the operator backend
            implementation.

        Attributes
        ----------
        Fmin, Fmax, Finf: TensorParameter
            All generated tensor parameters.
            Unused statistics are set to None.

        Notes
        -----
        About statistics:
            Finf requires the computation of Fmin and Fmax and will have value:
                Finf = Sinf * max( abs(Smin*Fmin), abs(Smax*Fmax) )
            where Sinf, Smin and Smax are the scaling coefficients defined in
            coeffs.
        """
        check_instance(F, Field)
        check_instance(dF, Field, allow_none=True)
        check_instance(derivative, int, allow_none=True)
        check_instance(direction, int, allow_none=True)
        check_instance(
            coeffs, dict, keys=str, values=(int, float, npw.number), allow_none=True
        )
        check_instance(
            variables,
            dict,
            keys=Field,
            values=CartesianTopologyDescriptors,
            allow_none=True,
        )
        check_instance(name, str, allow_none=True)
        check_instance(pbasename, str, allow_none=True)
        check_instance(ppbasename, str, allow_none=True)
        check_instance(implementation, Implementation, allow_none=True)
        check_instance(base_kwds, dict, keys=str, allow_none=True)

        if (
            ((Fmin is None) or (Fmin is False))
            and ((Fmax is None) or (Fmax is False))
            and ((Finf is None) or (Finf is False))
        ):
            msg = "No statistics were requested."
            msg += "\nPlease specify Fmin, Fmax and/or Finf by either setting "
            msg += "their value to True, or by passing an already existing "
            msg += "tensor parameter."
            raise ValueError(msg)

        # Pregenerate parameters so that we can directly store them in self.
        parameters = MinMaxDerivativeStatisticsBase.build_parameters(
            field=F,
            all_quiet=all_quiet,
            components=tuple(range(F.nb_components)),
            Fmin=Fmin,
            Fmax=Fmax,
            Finf=Finf,
            pbasename=pbasename,
            ppbasename=ppbasename,
        )
        (Fmin, Fmax, Finf) = tuple(parameters[k] for k in ("Fmin", "Fmax", "Finf"))

        super().__init__(
            F=F,
            dF=dF,
            A=A,
            derivative=derivative,
            direction=direction,
            Fmin=Fmin,
            Fmax=Fmax,
            Finf=Finf,
            coeffs=coeffs,
            all_quiet=all_quiet,
            name=name,
            pbasename=pbasename,
            ppbasename=ppbasename,
            variables=variables,
            implementation=implementation,
            base_kwds=base_kwds,
            **kwds,
        )
        self.Fmin, self.Fmax, self.Finf = (Fmin, Fmax, Finf)
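
# --- Worked example of the Finf relation documented above (illustrative) ---
# With unit min/max scaling (Smin = Smax = 1) and Sinf = 2, a derivative field
# whose extrema are min(dF) = -3.0 and max(dF) = 2.5 yields
#   Finf = Sinf * max(abs(Smin * Fmin), abs(Smax * Fmax))
#        = 2 * max(3.0, 2.5)
#        = 6.0
# i.e. the scaled maximum absolute value of the derivative, obtained from the
# already available Fmin and Fmax without an extra pass over the data.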
class MinMaxSpectralDerivativeStatistics(MinMaxDerivativeStatistics):
    """
    Operator frontend to compute min and max statistics on a specific
    derivative of a scalar field, using the spectral method.
    """

    @classmethod
    def implementations(cls):
        from hysop.backend.host.python.operator.min_max import (
            PythonMinMaxSpectralDerivativeStatistics,
        )
        from hysop.backend.device.opencl.operator.min_max import (
            OpenClMinMaxSpectralDerivativeStatistics,
        )

        implementations = {
            Implementation.PYTHON: PythonMinMaxSpectralDerivativeStatistics,
            Implementation.OPENCL: OpenClMinMaxSpectralDerivativeStatistics,
        }
        return implementations

    @classmethod
    def default_implementation(cls):
        return Implementation.PYTHON
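
# --- Usage sketch (illustrative, not part of the original module) ---
# Typical use of the spectral variant: track max(|dS/dy|) of a scalar field,
# for instance to feed an adaptive time-step criterion. The names `box`,
# `scal` and `npts` are assumptions introduced for illustration only.
#
#   from hysop import Box, Field
#
#   box  = Box(length=(1.0,) * 2)
#   scal = Field(domain=box, name="S")   # scalar field
#   npts = (128,) * 2
#
#   # First-order derivative along direction 1 (y); only Finf is requested,
#   # so Fmin and Fmax are autogenerated quietly because Finf depends on them.
#   dS_stats = MinMaxSpectralDerivativeStatistics(
#       F=scal,
#       derivative=1, direction=1,
#       Finf=True,
#       variables={scal: npts},
#   )
#   # dS_stats.Finf is the TensorParameter holding max(|dS/dy|).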
class MinMaxFiniteDifferencesDerivativeStatistics(MinMaxDerivativeStatistics):
    """
    Operator frontend to compute min and max statistics on a specific
    derivative of a scalar field, using finite differences.
    """

    @classmethod
    def implementations(cls):
        from hysop.backend.host.python.operator.min_max import (
            PythonMinMaxFiniteDifferencesDerivativeStatistics,
        )
        from hysop.backend.device.opencl.operator.min_max import (
            OpenClMinMaxFiniteDifferencesDerivativeStatistics,
        )

        implementations = {
            Implementation.PYTHON: PythonMinMaxFiniteDifferencesDerivativeStatistics,
            Implementation.OPENCL: OpenClMinMaxFiniteDifferencesDerivativeStatistics,
        }
        return implementations

    @classmethod
    def default_implementation(cls):
        return Implementation.PYTHON
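
# --- Choosing between the two concrete frontends (illustrative) ---
# Both subclasses expose the signature documented in MinMaxDerivativeStatistics
# and differ only in the backend operators they dispatch to (spectral versus
# finite-differences evaluation of the derivative). Switching scheme is
# therefore a one-line change in user code, e.g. (names assumed as above):
#
#   dS_stats = MinMaxFiniteDifferencesDerivativeStatistics(
#       F=scal,
#       derivative=1, direction=1,
#       Finf=True,
#       variables={scal: npts},
#   )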